import pandas as pd
import numpy as np
import warnings
import sys
#visualization
import matplotlib.pyplot as plt
import seaborn as sns
import statsmodels.api as sm
from sktime.utils.plotting import plot_series, plot_lags, plot_correlations
#from visuals import *
#config to clean up the notebook
pd.set_option('display.max_columns', None)
pd.options.display.float_format = '{:.2f}'.format
warnings.filterwarnings('ignore')
#read the data and parse
df = pd.read_csv('sales_clean.csv')
df = df.set_index(['1', '2']).sort_index()
df.head()
df_sales = df[df['0'] == 'sales']
df_onpromotion = df[df['0'] == 'onpromotion']
df_sales.drop('0', axis=1, inplace=True)
df_sales.index.rename(['family', 'date'], level=[0,1], inplace=True)
df_sales = df_sales.unstack('family')
df_sales.columns = df_sales.columns.droplevel()
df_sales.head()
df_onpromotion.drop('0', axis=1, inplace=True)
df_onpromotion.index.rename(['family', 'date'], level=[0,1], inplace=True)
df_onpromotion = df_onpromotion.unstack('family')
df_onpromotion.columns = df_onpromotion.columns.droplevel()
#parse dates
df_sales.index = pd.to_datetime(df_sales.index)
df_onpromotion.index = pd.to_datetime(df_onpromotion.index)
from sktime.forecasting.model_selection import SlidingWindowSplitter
#separate train and test
y_train = df_sales.iloc[:-15]
y_test = df_sales.iloc[-15:]
#check skewness and kurtosis
print('Skewness : ',np.mean(y_train.skew()))
print('Kurtosis : ',np.mean(y_train.kurt()))
Skewness : 1.8029691027811172
Kurtosis : 10.976282876577864
Here we can see that the skewness and kurtosis values for the training data are very high. This suggests that a log transformation could even out the variance and help our predictive models. One problem with a plain log transformation, however, is that it cannot deal with zero values, and it does not handle small values well either.
The log1p transformation can handle these circumstances, which makes it a better choice for our dataset. I think we should conditionally keep the zero values at zero, because they indicate zero sales, and we want to make sure we capture that dynamic. Let's try it out and see how it affects the skewness and kurtosis.
y_train_log1p = y_train.apply(lambda x: np.where(x > 0, np.log1p(x), 0))
print('Skewness after log1p : ',np.mean(y_train_log1p.skew()))
print('Kurtosis after log1p : ',np.mean(y_train_log1p.kurt()))
Skewness after log1p : 0.5123378554073319
Kurtosis after log1p : 0.8237944887158626
We can see that after the log1p transformation, the variance has stabilized substantially. This should help our predictive models learn, although we need to remember to reverse this transformation with np.expm1() when we make predictions.
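As a quick sanity check on a hypothetical toy array: np.expm1 exactly inverts np.log1p, and since expm1(0) == 0, the zeros we conditionally kept at zero also round-trip cleanly.
#sanity check on toy values (illustration only): expm1 inverts log1p
vals = np.array([0.0, 1.0, 10.0, 1000.0])
transformed = np.where(vals > 0, np.log1p(vals), 0)
print(np.expm1(transformed))  #[0. 1. 10. 1000.] -- zeros stay at zero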
from sktime.forecasting.naive import NaiveForecaster
from sktime.forecasting.base import ForecastingHorizon
fh = ForecastingHorizon(y_test.index, is_relative=False)
forecaster = NaiveForecaster(strategy='last', sp=7)
forecaster.fit(y_train)
y_pred = forecaster.predict(fh)
from sktime.performance_metrics.forecasting import MeanSquaredError
rmse = MeanSquaredError(square_root=True)
for col in y_train.columns:
    plot_series(y_train[col], y_test[col], y_pred[col], labels=['y_train', 'y_test', 'y_pred'])
    print('RMSE for {}: {}'.format(col, rmse(y_test[col], y_pred[col])))
print('Overall RMSE {}'.format(rmse(y_test,y_pred)))
RMSE for AUTOMOTIVE: 1.358381001891627
RMSE for BABY CARE: 0.0969352019679599
RMSE for BEAUTY: 1.6163742516734927
RMSE for BEVERAGES: 629.8133697253157
RMSE for BOOKS: 0.02191140676732531
RMSE for BREAD/BAKERY: 54.638165398252255
RMSE for CELEBRATION: 2.3688554957858448
RMSE for CLEANING: 581.5756684629596
RMSE for DAIRY: 95.19699760691029
RMSE for DELI: 41.212282499624564
RMSE for EGGS: 20.67250901534757
RMSE for FROZEN FOODS: 12.506054721096808
RMSE for GROCERY I: 791.7376935103116
RMSE for GROCERY II: 11.5325489717349
RMSE for HARDWARE: 0.19580651176327848
RMSE for HOME AND KITCHEN I: 5.854356140667163
RMSE for HOME AND KITCHEN II: 7.427266496125442
RMSE for HOME APPLIANCES: 0.15180667666099854
RMSE for HOME CARE: 78.19120871893784
RMSE for LADIESWEAR: 1.791976582463979
RMSE for LAWN AND GARDEN: 1.8161242622125413
RMSE for LINGERIE: 2.289471234711228
RMSE for LIQUOR,WINE,BEER: 22.58739890864535
RMSE for MAGAZINES: 2.4666501000439687
RMSE for MEATS: 37.824619704418026
RMSE for PERSONAL CARE: 76.6610270107757
RMSE for PET SUPPLIES: 1.4669191549337932
RMSE for PLAYERS AND ELECTRONICS: 1.7508374533172635
RMSE for POULTRY: 45.92946889127437
RMSE for PREPARED FOODS: 6.923388492262947
RMSE for PRODUCE: 227.95699803772067
RMSE for SCHOOL AND OFFICE SUPPLIES: 47.81117891504797
RMSE for SEAFOOD: 3.299214022617051
Overall RMSE 85.35586256315877
Here we have the first and simplest iteration of a predictor: a NaiveForecaster with a seasonal period of 7 and a strategy of "last", which means each prediction repeats the most recent observed value from the same position in the 7-day seasonal cycle. This parameter was chosen to capture some of the weekly seasonality we observed in the data.
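To make that behavior concrete, here is a minimal sketch on a hypothetical toy series (not our data): the 7-step-ahead forecast simply repeats the last 7 observed values in seasonal order.
#toy illustration of strategy='last' with sp=7 (hypothetical series)
toy = pd.Series(np.arange(1.0, 15.0), index=pd.date_range('2017-01-01', periods=14, freq='D'))
naive = NaiveForecaster(strategy='last', sp=7)
naive.fit(toy)
print(naive.predict(fh=np.arange(1, 8)))  #repeats the final observed week: 8.0 through 14.0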
Here I have chosen RMSE, or root mean squared error, as the evaluation metric. One advantage of this metric is that the results are in the same units as the target variable, which makes for good explainability downstream. It is also robust to zero values, where something like MAPE, or mean absolute percentage error, is not. This matters for us because some families, BOOKS for example, have zero values in the ground truth for the test set.
We can inspect the results visually and observe the RMSE per family. This gives us a baseline against which to evaluate future iterations of modeling and prediction. Since RMSE is in the same units as our target variable, it represents an error in sales units, so lower is better.
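To make the zero-value point concrete, here is a toy comparison with hypothetical numbers: MAPE divides by the ground truth, so a zero actual makes it explode, while RMSE is unaffected.
from sktime.performance_metrics.forecasting import MeanAbsolutePercentageError
#toy comparison (illustration only): a zero in the ground truth breaks MAPE, not RMSE
y_true_toy = pd.Series([0.0, 2.0, 4.0])
y_pred_toy = pd.Series([1.0, 2.0, 4.0])
print(rmse(y_true_toy, y_pred_toy))  #well-defined: ~0.577
print(MeanAbsolutePercentageError(symmetric=False)(y_true_toy, y_pred_toy))  #explodes from division by the zero actual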
It should be noted however, that in order to actually select the model that generalizes the best to unseen data, we should implement a cross-validation strategy that will iteratively fit models and then test on portions of our training set. We can record the RMSE for each of these iterations and then take an average to see how well it does. While this is not a concept specific to time-series analysis, some special consideration needs to be applied to the splitting process when dealing with a time series.
The most important considerations are that we prevent data leakage (the model must never be trained on observations that come after the ones it is validated on) and that the training data in each fold consists of consecutive dates.
We will be using a k-fold CV strategy with an expanding window. For 5 folds (k=5) it would look something like this, where + marks training data and * marks the validation window:
fold 0 : ++++++++***
fold 1 : ++++++++++++++++***
fold 2 : ++++++++++++++++++++++++***
fold 3 : ++++++++++++++++++++++++++++++++***
fold 4 : ++++++++++++++++++++++++++++++++++++++++***
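sktime implements exactly this scheme via ExpandingWindowSplitter; as a small sketch with hypothetical toy sizes, we can print the train/test boundaries each fold produces:
from sktime.forecasting.model_selection import ExpandingWindowSplitter
#toy illustration of the expanding window scheme (hypothetical sizes)
toy_y = pd.Series(np.arange(20.0))
toy_cv = ExpandingWindowSplitter(fh=[1, 2, 3], initial_window=8, step_length=3)
for i, (train_idx, test_idx) in enumerate(toy_cv.split(toy_y)):
    print('fold {} : train {}..{}  test {}..{}'.format(i, train_idx[0], train_idx[-1], test_idx[0], test_idx[-1]))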
from sklearn.model_selection import TimeSeriesSplit, KFold
from scipy.stats import t
#runs k fold cross validation and returns the mean of the error metric
def ts_model_cv(y_train, forecaster=None, cv=None, fh=None, metric=None, fit_fh=False):
    """
    performs cross validation for model evaluation
    Parameters:
        y_train : the training data to use for model evaluation
        forecaster : an sktime forecaster object
        cv : an sktime cross validation object
        fh : an sktime forecast horizon object
        metric : an sktime scoring metric
        fit_fh : boolean value that determines whether to pass fh to fit method
    Returns:
        The mean of the scoring metric across all k folds of cross validation
    """
    folds = list(cv.split_loc(y_train))
    results = []
    for n in range(cv.get_n_splits(y_train)):
        train = y_train.loc[folds[n][0]]
        test = y_train.loc[folds[n][1]]
        train.index.freq = 'D'
        test.index.freq = 'D'
        if fit_fh:
            forecaster.fit(train, fh=fh)
        else:
            forecaster.fit(train)
        y_pred = forecaster.predict(fh)
        #score on the original sales scale by reversing the log1p transform;
        #the per-fold print below stays on the log scale
        results.append(metric(np.expm1(test), np.expm1(y_pred)))
        print('Fold {}: {}'.format(n, metric(test, y_pred)))
    return np.mean(results)
#diebold-mariano statistical test for significance of prediction accuracy comparison
def diebold_mariano_test(forecast1, forecast2, actual):
    """
    Conduct the Diebold-Mariano test to compare the accuracy of two sets of time series forecasts.
    Parameters:
        forecast1 (array-like): The first set of forecasts
        forecast2 (array-like): The second set of forecasts
        actual (array-like): The actual values of the time series
    Returns:
        tuple: A tuple containing the DM test statistic and p-value
    """
    #compute residuals
    residuals1 = actual.values - forecast1.values
    residuals2 = actual.values - forecast2.values
    #compute loss differential series (squared-error loss)
    loss_differentials = residuals1**2 - residuals2**2
    mean = np.mean(loss_differentials)
    variance = np.var(loss_differentials)
    n = loss_differentials.shape[0]
    #simple iid variance estimate; the textbook DM test would use a HAC
    #estimator here to account for autocorrelation in the loss differential
    standard_error = np.sqrt(variance / n)
    #compute test statistic and two-sided p-value
    diebold_mariano_statistic = mean / standard_error
    pvalue = 2 * (1 - t.cdf(np.abs(diebold_mariano_statistic), n - 1))
    return diebold_mariano_statistic, pvalue
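As a usage sketch with hypothetical numbers (illustration only): pass two competing forecast series and the shared actuals; a small p-value suggests a significant difference in accuracy.
#hypothetical usage sketch of the DM test
actual_toy = pd.Series([10.0, 12.0, 11.0, 13.0, 12.5, 14.0])
fc_a = pd.Series([9.5, 12.5, 10.0, 13.5, 12.0, 13.0])
fc_b = pd.Series([11.0, 10.0, 12.5, 11.0, 14.5, 16.0])
stat, p = diebold_mariano_test(fc_a, fc_b, actual_toy)
print('DM statistic: {:.3f}, p-value: {:.4f}'.format(stat, p))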
Here we can see that our RMSE across the 50-fold cross validation is a bit higher than the RMSE on our holdout test set. In real life we won't have the ground truth values for the period we are forecasting, so this cross validation performance is a much more reliable measure of our model's performance than a single comparison against the test set.
Let's try a few more models.
from sktime.forecasting.exp_smoothing import ExponentialSmoothing
from sktime.forecasting.trend import STLForecaster
from sktime.forecasting.compose import DirRecTimeSeriesRegressionForecaster, make_reduction
from sklearn.ensemble import GradientBoostingRegressor
from lightgbm import LGBMRegressor
from xgboost import XGBRegressor
from sktime.forecasting.model_selection import ExpandingWindowSplitter
model_results = {}
fh = ForecastingHorizon(np.arange(1,16))
cv = ExpandingWindowSplitter(fh=fh, initial_window=42, step_length=15)
metric = MeanSquaredError(square_root=True)
models = [
    #NaiveForecaster(strategy='last', sp=7),
    #ExponentialSmoothing(trend='add', seasonal='add', sp=7),
    #make_reduction(LGBMRegressor(max_depth=6, n_estimators=20), window_length=13),
    #make_reduction(XGBRegressor(max_depth=6, n_estimators=20), window_length=13),
    #make_reduction(LGBMRegressor(max_depth=7, n_estimators=52), window_length=20),
    #make_reduction(XGBRegressor(max_depth=7, n_estimators=52), window_length=20),
    #make_reduction(LGBMRegressor(max_depth=5, n_estimators=29), window_length=20),
    #make_reduction(LGBMRegressor(max_depth=7, n_estimators=24), window_length=17),
    #STLForecaster(seasonal=7, sp=7, trend=55, robust=True),
    STLForecaster(seasonal=29, sp=7, trend=67, robust=True)
]
for model in models:
    model_results[str(model)] = ts_model_cv(y_train_log1p, forecaster=model, cv=cv, fh=fh, metric=metric)
Fold 0: 0.13449048553108903
Fold 1: 0.189082357044529
Fold 2: 0.15896855175179006
Fold 3: 0.1589063116261402
Fold 4: 0.18404793988640053
Fold 5: 0.23800187132502187
Fold 6: 0.15648267620871573
Fold 7: 0.26325465953136135
Fold 8: 0.10896100333787913
Fold 9: 0.16642297775755194
Fold 10: 0.29452451262647444
Fold 11: 0.2573970190086738
Fold 12: 0.2865945447185773
Fold 13: 0.18810947950514995
Fold 14: 0.1477068919076056
Fold 15: 0.1353772094889417
Fold 16: 0.13957404559988584
Fold 17: 0.17046984596290768
Fold 18: 0.1907668399703173
Fold 19: 0.18705289938963482
Fold 20: 0.21945603082596066
Fold 21: 0.2338320921964671
Fold 22: 0.16769272371540647
Fold 23: 0.16723363404495575
Fold 24: 0.11290247430688526
Fold 25: 0.146744265065268
Fold 26: 0.19001300357695686
Fold 27: 0.15703271681501388
Fold 28: 0.1533284098079394
Fold 29: 0.246640661074399
Fold 30: 0.13585718429942592
Fold 31: 0.1799024987287439
Fold 32: 0.1407799987308632
Fold 33: 0.1352930582414713
Fold 34: 0.20339084858066753
Fold 35: 0.33818578116024445
Fold 36: 0.3024792985696044
Fold 37: 0.16393263720296036
Fold 38: 0.14855656388266875
Fold 39: 0.23349333654295248
Fold 40: 0.12523563075616653
Fold 41: 0.2119930452552786
Fold 42: 0.16582016766559465
Fold 43: 0.1755093045944798
Fold 44: 0.18205690228917557
Fold 45: 0.1546042023987327
Fold 46: 0.12026746593375226
Fold 47: 0.1313080214660336
Fold 48: 0.11063841800092722
Fold 49: 0.12630786725385754
From trying a few different models and experimenting with a few hyperparameters for the STL forecaster, we have obtained a lower CV score with the STLForecaster using sp=7 and trend=55. Recall that in the EDA we found that an sp of 28 smoothed the trend out enough that most of the seasonality was being caught in the decomposition. It turns out this is actually due to the trend hyperparameter, which by default is set based on the sp value. The sp value should be 7 to capture weekly seasonality, so the 55 value was calculated using the formula from the documentation based on the sp=28 value we used during decomposition.
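For reference, the statsmodels STL documentation gives the default trend length as the smallest odd integer greater than 1.5 * period / (1 - 1.5 / seasonal); plugging in the sp=28 we used during decomposition (with the default seasonal smoother length of 7) recovers the 55:
import math
#default trend length formula from the statsmodels STL docs
period, seasonal = 28, 7
trend = math.ceil(1.5 * period / (1 - 1.5 / seasonal))
trend = trend + 1 if trend % 2 == 0 else trend
print(trend)  #55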
We can also see, when we check the RMSE against our holdout test set, that the performance is better than with the NaiveForecaster alone. There are many STLForecaster hyperparameters we could continue to tune, such as using different forecasters within the process to forecast each individual component. We can also observe that a large source of error for both models was the huge surge in "SCHOOL AND OFFICE SUPPLIES" that occurred during the test set period.
#lets output these results to CSV to use in our dashboard
#y_pred.unstack().to_csv('STLForecaster_pred.csv', index_label=[0,1,2])
Now that we have evaluated the performance of some models, let's work on tuning them and see which ones give the best CV scores. We can use sktime's ForecastingRandomizedSearchCV to accomplish this task.
from sktime.forecasting.model_selection import ForecastingRandomizedSearchCV
from sktime.forecasting.model_selection import SlidingWindowSplitter
from sktime.forecasting.model_selection import temporal_train_test_split
from sktime.forecasting.compose import make_reduction
from sklearn.ensemble import RandomForestRegressor
from sktime.forecasting.model_selection import ExpandingWindowSplitter
from lightgbm import LGBMRegressor
from sktime.forecasting.trend import STLForecaster
tuned_params = {}
#rmse = MeanSquaredError(square_root=True)
#forecaster = STLForecaster(sp = 7, robust=True)
#nested_params = {"seasonal": list(range(7,33,2)),
# "trend" : list(range(15, 75, 2))}
forecaster = make_reduction(LGBMRegressor())
nested_params = {"window_length" : list(range(7,32)),
                 "estimator__max_depth" : list(range(5,16)),
                 "estimator__n_estimators" : list(range(20,120))}
fh = ForecastingHorizon(np.arange(1,16), is_relative=True)
#cv = SlidingWindowSplitter(initial_window=60, window_length=30)
cv = ExpandingWindowSplitter(fh=fh, initial_window=42, step_length=15)
nrcv = ForecastingRandomizedSearchCV(forecaster=forecaster, strategy="refit", cv=cv,
                                     param_distributions=nested_params,
                                     n_iter=100, random_state=42, scoring=rmse,
                                     error_score='raise')
for col in y_train_log1p.columns:
    nrcv.fit(y_train_log1p[col])
    tuned_params[col] = nrcv.best_params_
    print('{} : {}'.format(col, nrcv.best_params_))
    print('{} : {}'.format(col, nrcv.best_score_))
AUTOMOTIVE : {'window_length': 29, 'estimator__n_estimators': 29, 'estimator__max_depth': 6}
AUTOMOTIVE : 0.15298936167797353
BABY CARE : {'window_length': 15, 'estimator__n_estimators': 29, 'estimator__max_depth': 9}
BABY CARE : 0.08423715616967752
BEAUTY : {'window_length': 28, 'estimator__n_estimators': 60, 'estimator__max_depth': 5}
BEAUTY : 0.1738061214230093
BEVERAGES : {'window_length': 16, 'estimator__n_estimators': 71, 'estimator__max_depth': 9}
BEVERAGES : 0.14494786208317043
BOOKS : {'window_length': 25, 'estimator__n_estimators': 22, 'estimator__max_depth': 12}
BOOKS : 0.07158133463704272
BREAD/BAKERY : {'window_length': 29, 'estimator__n_estimators': 91, 'estimator__max_depth': 6}
BREAD/BAKERY : 0.09939267151130265
CELEBRATION : {'window_length': 15, 'estimator__n_estimators': 46, 'estimator__max_depth': 15}
CELEBRATION : 0.14534540575144145
CLEANING : {'window_length': 30, 'estimator__n_estimators': 60, 'estimator__max_depth': 11}
CLEANING : 0.1408606947033281
DAIRY : {'window_length': 29, 'estimator__n_estimators': 91, 'estimator__max_depth': 6}
DAIRY : 0.12342255756559219
DELI : {'window_length': 30, 'estimator__n_estimators': 32, 'estimator__max_depth': 14}
DELI : 0.12964310194596743
EGGS : {'window_length': 25, 'estimator__n_estimators': 77, 'estimator__max_depth': 6}
EGGS : 0.12415961130391667
FROZEN FOODS : {'window_length': 13, 'estimator__n_estimators': 57, 'estimator__max_depth': 12}
FROZEN FOODS : 0.21897105051203689
GROCERY I : {'window_length': 29, 'estimator__n_estimators': 29, 'estimator__max_depth': 6}
GROCERY I : 0.15520284319198333
GROCERY II : {'window_length': 31, 'estimator__n_estimators': 95, 'estimator__max_depth': 5}
GROCERY II : 0.1994255965849999
HARDWARE : {'window_length': 21, 'estimator__n_estimators': 27, 'estimator__max_depth': 5}
HARDWARE : 0.12838780063656566
HOME AND KITCHEN I : {'window_length': 18, 'estimator__n_estimators': 26, 'estimator__max_depth': 5}
HOME AND KITCHEN I : 0.20438924482404996
HOME AND KITCHEN II : {'window_length': 26, 'estimator__n_estimators': 24, 'estimator__max_depth': 14}
HOME AND KITCHEN II : 0.23134170485225217
HOME APPLIANCES : {'window_length': 19, 'estimator__n_estimators': 51, 'estimator__max_depth': 11}
HOME APPLIANCES : 0.11672082998850042
HOME CARE : {'window_length': 29, 'estimator__n_estimators': 52, 'estimator__max_depth': 8}
HOME CARE : 0.15298000200062944
LADIESWEAR : {'window_length': 21, 'estimator__n_estimators': 27, 'estimator__max_depth': 5}
LADIESWEAR : 0.18367427984177365
LAWN AND GARDEN : {'window_length': 29, 'estimator__n_estimators': 29, 'estimator__max_depth': 6}
LAWN AND GARDEN : 0.21706988839521524
LINGERIE : {'window_length': 19, 'estimator__n_estimators': 36, 'estimator__max_depth': 12}
LINGERIE : 0.17710314856725898
LIQUOR,WINE,BEER : {'window_length': 16, 'estimator__n_estimators': 81, 'estimator__max_depth': 9}
LIQUOR,WINE,BEER : 0.7166588815690556
MAGAZINES : {'window_length': 29, 'estimator__n_estimators': 29, 'estimator__max_depth': 6}
MAGAZINES : 0.300333794722799
MEATS : {'window_length': 30, 'estimator__n_estimators': 60, 'estimator__max_depth': 11}
MEATS : 0.11380282508205534
PERSONAL CARE : {'window_length': 30, 'estimator__n_estimators': 60, 'estimator__max_depth': 11}
PERSONAL CARE : 0.1805500682094707
PET SUPPLIES : {'window_length': 29, 'estimator__n_estimators': 29, 'estimator__max_depth': 6}
PET SUPPLIES : 0.15265439679129333
PLAYERS AND ELECTRONICS : {'window_length': 29, 'estimator__n_estimators': 29, 'estimator__max_depth': 6}
PLAYERS AND ELECTRONICS : 0.1788969026459452
POULTRY : {'window_length': 30, 'estimator__n_estimators': 60, 'estimator__max_depth': 11}
POULTRY : 0.1265982698289237
PREPARED FOODS : {'window_length': 28, 'estimator__n_estimators': 60, 'estimator__max_depth': 5}
PREPARED FOODS : 0.0972810603313607
PRODUCE : {'window_length': 15, 'estimator__n_estimators': 29, 'estimator__max_depth': 9}
PRODUCE : 0.10580578988635088
SCHOOL AND OFFICE SUPPLIES : {'window_length': 17, 'estimator__n_estimators': 83, 'estimator__max_depth': 5}
SCHOOL AND OFFICE SUPPLIES : 0.40656592011055787
SEAFOOD : {'window_length': 30, 'estimator__n_estimators': 32, 'estimator__max_depth': 14}
SEAFOOD : 0.142527481513129
#lets output the results to a csv to save these tuned parameters
#params = pd.DataFrame.from_dict(tuned_params)
#params.to_csv('LGBM_tuned_params.csv')
def cv_multivariate_params(y_train, params, cv=None, fh=None, metric=None, fit_fh=False):
    """
    Takes a dictionary of parameter dictionaries and iterates our CV function for each model
    Parameters:
        y_train : training data to use for model evaluation
        params : dictionary with columns as keys and dictionaries of parameter : value pairs as values
        cv : sktime cv object
        fh : sktime fh object
        metric : sktime scoring metric object
        fit_fh : whether to pass fh to fit function
    Returns:
        The mean of all models' cv scores across all cv folds
    """
    cv_score = []
    print(y_train.index.freq)
    print(cv)
    print(fh)
    for col in y_train:
        regressor = LGBMRegressor(n_estimators=params[col]['estimator__n_estimators'],
                                  max_depth=params[col]['estimator__max_depth'])
        forecaster = make_reduction(regressor, window_length=params[col]['window_length'])
        cv_score.append(ts_model_cv(y_train[col], forecaster=forecaster, cv=cv, fh=fh, metric=metric, fit_fh=fit_fh))
    return np.mean(cv_score)
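The printed output below came from running this helper over the tuned parameters; the invoking cell is not shown here, but it would have looked something like this reconstructed sketch:
#reconstructed sketch of the missing call cell (not the original code)
overall_cv_score = cv_multivariate_params(y_train_log1p, tuned_params, cv=cv, fh=fh, metric=metric)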
<Day>
ExpandingWindowSplitter(fh=ForecastingHorizon([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15], dtype='int64', is_relative=True),
initial_window=42, step_length=15)
ForecastingHorizon([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15], dtype='int64', is_relative=True)
Fold 0: 0.21373290347360438
Fold 1: 0.2066732400371906
Fold 2: 0.12157875213773073
Fold 3: 0.1332166383130628
Fold 4: 0.07803869963173435
Fold 5: 0.2672488843398041
Fold 6: 0.1653150059569904
Fold 7: 0.18573764419049643
Fold 8: 0.12454314637686269
Fold 9: 0.09075348878100711
Fold 10: 0.1897320452418359
Fold 11: 0.1777326761967685
Fold 12: 0.28664167523133105
Fold 13: 0.16932266021348144
Fold 14: 0.13450692880317283
Fold 15: 0.22272094122257277
Fold 16: 0.08187575921738548
Fold 17: 0.17557754228618933
Fold 18: 0.08911193525520962
Fold 19: 0.08413454449487248
Fold 20: 0.08045134958877308
Fold 21: 0.21994857481700045
Fold 22: 0.19939335911927197
Fold 23: 0.1215394051172219
Fold 24: 0.17451203528317852
Fold 25: 0.1156948523687647
Fold 26: 0.1338794660008817
Fold 27: 0.13210543238522618
Fold 28: 0.19145082858881785
Fold 29: 0.19690749753516437
Fold 30: 0.10678438626103798
Fold 31: 0.1892253897517739
Fold 32: 0.16503328013498314
Fold 33: 0.08994717818269841
Fold 34: 0.07611517722402968
Fold 35: 0.3252044230510336
Fold 36: 0.19231012557499125
Fold 37: 0.1229221401062887
Fold 38: 0.12222212482074692
Fold 39: 0.19221287263825837
Fold 40: 0.12433366057141472
Fold 41: 0.18101942752676248
Fold 42: 0.15486192442213909
Fold 43: 0.16380442551111238
Fold 44: 0.1745507517381699
Fold 45: 0.19298310475041153
Fold 46: 0.10918146210299456
Fold 47: 0.07600848490849621
Fold 48: 0.06388981213067338
Fold 49: 0.06278002028505716
Fold 0: 0.06092105516093946
Fold 1: 0.10074966276454973
Fold 2: 0.07182763065499473
Fold 3: 0.10681930804639678
Fold 4: 0.1037270510277278
Fold 5: 0.09023328472105749
Fold 6: 0.09348819669604641
Fold 7: 0.029746341868066163
Fold 8: 0.010196171073928974
Fold 9: 0.05589513268142949
Fold 10: 0.07623999079399532
Fold 11: 0.06918977633148833
Fold 12: 0.0861627561468211
Fold 13: 0.09880238479478679
Fold 14: 0.1119567331973315
Fold 15: 0.15952270701936666
Fold 16: 0.10743086615120108
Fold 17: 0.07483440224190778
Fold 18: 0.07059543017571067
Fold 19: 0.1251518372377648
Fold 20: 0.08221137195589846
Fold 21: 0.12006941771971483
Fold 22: 0.04740837530443558
Fold 23: 0.0480719488569175
Fold 24: 0.08465752530134812
Fold 25: 0.08930142046671054
Fold 26: 0.08904147951228537
Fold 27: 0.1450562341034296
Fold 28: 0.08578215600106849
Fold 29: 0.07661369177324304
Fold 30: 0.08468244249404566
Fold 31: 0.07762254211511695
Fold 32: 0.09270804556143061
Fold 33: 0.09844206127552432
Fold 34: 0.11055211284707608
Fold 35: 0.0826480579800583
Fold 36: 0.06965133570858555
Fold 37: 0.06716844938722626
Fold 38: 0.09189282084175614
Fold 39: 0.12555029283408822
Fold 40: 0.06981826524094542
Fold 41: 0.07782168464797284
Fold 42: 0.0768093465166831
Fold 43: 0.0897907746073806
Fold 44: 0.05442060658511783
Fold 45: 0.08571582611691839
Fold 46: 0.07356842579077927
Fold 47: 0.06630010693576134
Fold 48: 0.07680920160426305
Fold 49: 0.06818106961258362
Fold 0: 0.20386092564622785
Fold 1: 0.2705174070111491
Fold 2: 0.15753309812604185
Fold 3: 0.19191612801840888
Fold 4: 0.16536696235019058
Fold 5: 0.27906889621831776
Fold 6: 0.23512289160700098
Fold 7: 0.2306058556041299
Fold 8: 0.14278509458529834
Fold 9: 0.15095418553785014
Fold 10: 0.14122984189303173
Fold 11: 0.135478983945864
Fold 12: 0.26736728855221165
Fold 13: 0.28623375320119193
Fold 14: 0.19864074714420127
Fold 15: 0.17517949928298795
Fold 16: 0.13865860294432392
Fold 17: 0.16391140122794953
Fold 18: 0.10890501110822634
Fold 19: 0.13604525282877605
Fold 20: 0.23554371651471287
Fold 21: 0.17808297602131026
Fold 22: 0.10497032163206198
Fold 23: 0.15965783383173085
Fold 24: 0.09393210160289661
Fold 25: 0.1568081433182141
Fold 26: 0.09210743203855315
Fold 27: 0.1580661675952365
Fold 28: 0.12327093660860826
Fold 29: 0.13385545166150672
Fold 30: 0.05843505390276267
Fold 31: 0.27807478428088495
Fold 32: 0.2151565928245863
Fold 33: 0.1567120693375555
Fold 34: 0.15214162566798484
Fold 35: 0.31898380304617036
Fold 36: 0.1827003642913808
Fold 37: 0.20438809233173272
Fold 38: 0.16318245147405058
Fold 39: 0.21693305425585088
Fold 40: 0.2288187685599881
Fold 41: 0.177689707108235
Fold 42: 0.08594967392416641
Fold 43: 0.1513164007677978
Fold 44: 0.14887177406523205
Fold 45: 0.2527841291748195
Fold 46: 0.12579253859669498
Fold 47: 0.15847010991775912
Fold 48: 0.119413873909059
Fold 49: 0.07881429605554184
Fold 0: 0.2061410053064344
Fold 1: 0.12783200123882332
Fold 2: 0.1500679271183549
Fold 3: 0.2255279305343369
Fold 4: 0.14216223792337754
Fold 5: 0.16678272253362453
Fold 6: 0.202369912721977
Fold 7: 0.17708146582155387
Fold 8: 0.19434725191938634
Fold 9: 0.13288595261795177
Fold 10: 0.1464614574774091
Fold 11: 0.21790174646405308
Fold 12: 0.22125961031553004
Fold 13: 0.13022645842413566
Fold 14: 0.12374761469000568
Fold 15: 0.09475398102633759
Fold 16: 0.0492977773245392
Fold 17: 0.12222222023427985
Fold 18: 0.2710590572854242
Fold 19: 0.21480430373289452
Fold 20: 0.09818720797021072
Fold 21: 0.13610303631813475
Fold 22: 0.1364470257992759
Fold 23: 0.09584163833559714
Fold 24: 0.0841853469648116
Fold 25: 0.12487995246169512
Fold 26: 0.13031576587631838
Fold 27: 0.1417490611415256
Fold 28: 0.13186090054279065
Fold 29: 0.14248046459359448
Fold 30: 0.10209031087840859
Fold 31: 0.18006279781376636
Fold 32: 0.10646322940049797
Fold 33: 0.11404912996086654
Fold 34: 0.10116595814947058
Fold 35: 0.28999906346337045
Fold 36: 0.19471630438357243
Fold 37: 0.11373292846554404
Fold 38: 0.16054912536984592
Fold 39: 0.16667250803989783
Fold 40: 0.17564853332616945
Fold 41: 0.11778253060397637
Fold 42: 0.10721016113454489
Fold 43: 0.1577313624097625
Fold 44: 0.12531108937537358
Fold 45: 0.09872812870730913
Fold 46: 0.1270058260615746
Fold 47: 0.08343678734924684
Fold 48: 0.12384143552376863
Fold 49: 0.062212859027170456
Fold 0: 0.0
Fold 1: 0.0
Fold 2: 0.0
Fold 3: 0.0
Fold 4: 0.0
Fold 5: 0.0
Fold 6: 0.0
Fold 7: 0.0
Fold 8: 0.0
Fold 9: 0.0
Fold 10: 0.0
Fold 11: 0.0
Fold 12: 0.0
Fold 13: 0.0
Fold 14: 0.0
Fold 15: 0.0
Fold 16: 0.0
Fold 17: 0.0
Fold 18: 0.0
Fold 19: 0.0
Fold 20: 0.0
Fold 21: 0.0
Fold 22: 0.0
Fold 23: 0.0
Fold 24: 0.0
Fold 25: 0.0
Fold 26: 0.0
Fold 27: 0.0
Fold 28: 0.0
Fold 29: 0.0
Fold 30: 0.8025921602222698
Fold 31: 0.8092009368123161
Fold 32: 0.23769296603222384
Fold 33: 0.10254276680781411
Fold 34: 0.15600396158058244
Fold 35: 0.13748549953628406
Fold 36: 0.1032139494164453
Fold 37: 0.14733378129234762
Fold 38: 0.23064244436006315
Fold 39: 0.16782395804794584
Fold 40: 0.149852752697511
Fold 41: 0.06128067200831082
Fold 42: 0.056789384947473785
Fold 43: 0.065274352124294
Fold 44: 0.04695636936360263
Fold 45: 0.07762701767661259
Fold 46: 0.06401209123851992
Fold 47: 0.07034348600918502
Fold 48: 0.06159751358019526
Fold 49: 0.030800668098138997
Fold 0: 0.15447605886825688
Fold 1: 0.14648967516684264
Fold 2: 0.06249646267246527
Fold 3: 0.1253849413982352
Fold 4: 0.06799401590448984
Fold 5: 0.09755564302532851
Fold 6: 0.06709422034303329
Fold 7: 0.17790957909462204
Fold 8: 0.18710265239953913
Fold 9: 0.060358047516149885
Fold 10: 0.08834846238554973
Fold 11: 0.18675841759234957
Fold 12: 0.17691650858199323
Fold 13: 0.15152197839079737
Fold 14: 0.09800238868066566
Fold 15: 0.09841701462700266
Fold 16: 0.06859171667347043
Fold 17: 0.08957167920700868
Fold 18: 0.11053209436500736
Fold 19: 0.09562756911425917
Fold 20: 0.10795232228615834
Fold 21: 0.09823476944178647
Fold 22: 0.07701725395935827
Fold 23: 0.06328686327748864
Fold 24: 0.03845743897990307
Fold 25: 0.06065081466915067
Fold 26: 0.09279282963351033
Fold 27: 0.10171777566238634
Fold 28: 0.08421364512318437
Fold 29: 0.05747862671064583
Fold 30: 0.05981786426156604
Fold 31: 0.11007749879712479
Fold 32: 0.07135150335840444
Fold 33: 0.052043366794460755
Fold 34: 0.07532984047565158
Fold 35: 0.1676176328872616
Fold 36: 0.1346632253921682
Fold 37: 0.09321826860624208
Fold 38: 0.060034284666339595
Fold 39: 0.15072097789033856
Fold 40: 0.07176115507280614
Fold 41: 0.1336568060824539
Fold 42: 0.12701956949547388
Fold 43: 0.13816710575351054
Fold 44: 0.07991074603667385
Fold 45: 0.08757008018141635
Fold 46: 0.07561967222762712
Fold 47: 0.049879642726415006
Fold 48: 0.056247630552056685
Fold 49: 0.08197523855650259
Fold 0: 0.19748597748525967
Fold 1: 0.12291689318610279
Fold 2: 0.16005317195771115
Fold 3: 0.09731967774021368
Fold 4: 0.13435450712750022
Fold 5: 0.16734928953330708
Fold 6: 0.14985110226225756
Fold 7: 0.09986547639077219
Fold 8: 0.098638892496304
Fold 9: 0.11682250447789702
Fold 10: 0.40710140157906344
Fold 11: 0.4945009289555171
Fold 12: 0.12090460750960078
Fold 13: 0.13583953243676328
Fold 14: 0.18665061529169952
Fold 15: 0.12033561822827961
Fold 16: 0.10137089964461052
Fold 17: 0.17193322215183265
Fold 18: 0.14189084965651383
Fold 19: 0.09954683814213611
Fold 20: 0.10784027897509843
Fold 21: 0.14759942660185166
Fold 22: 0.10601656451592331
Fold 23: 0.08864541080238893
Fold 24: 0.10345439365045059
Fold 25: 0.10975911703618181
Fold 26: 0.179152393148167
Fold 27: 0.10660643790430713
Fold 28: 0.06866712172523311
Fold 29: 0.0962757075403487
Fold 30: 0.08825117424006945
Fold 31: 0.11403231161925304
Fold 32: 0.11158969852934422
Fold 33: 0.09100144994971232
Fold 34: 0.32704786436763844
Fold 35: 0.3291660024089135
Fold 36: 0.23413511146664256
Fold 37: 0.13811451457162582
Fold 38: 0.07572648345772846
Fold 39: 0.11714866818249113
Fold 40: 0.10099487444574295
Fold 41: 0.1323859065040469
Fold 42: 0.09370838239759342
Fold 43: 0.14226117209896433
Fold 44: 0.14993527042091045
Fold 45: 0.12401473625588592
Fold 46: 0.09110347584854447
Fold 47: 0.1420820045187303
Fold 48: 0.09012244625593023
Fold 49: 0.13569988187901144
Fold 0: 0.22839007408135223
Fold 1: 0.22278458469574866
Fold 2: 0.07489711172217423
Fold 3: 0.15250817469993558
Fold 4: 0.09004145753329437
Fold 5: 0.1489456744172899
Fold 6: 0.07349747214304633
Fold 7: 0.17937600598687847
Fold 8: 0.08680909918293934
Fold 9: 0.0780355458558577
Fold 10: 0.2770012552008279
Fold 11: 0.12852445315206018
Fold 12: 0.2259820399810589
Fold 13: 0.1369781557661956
Fold 14: 0.09694827727508079
Fold 15: 0.09571792667777919
Fold 16: 0.11965267490330195
Fold 17: 0.15957760752993516
Fold 18: 0.163120700361685
Fold 19: 0.1249021810952544
Fold 20: 0.2193601863763065
Fold 21: 0.13021325264042158
Fold 22: 0.08941994996972877
Fold 23: 0.09491245610409854
Fold 24: 0.10197909724649076
Fold 25: 0.07915664870438112
Fold 26: 0.1228062011987746
Fold 27: 0.13988717945631937
Fold 28: 0.1880165228925689
Fold 29: 0.0821835275114025
Fold 30: 0.059185362300643876
Fold 31: 0.12721734593979964
Fold 32: 0.10375762159819234
Fold 33: 0.13097535213417208
Fold 34: 0.09148665772986148
Fold 35: 0.35747087392150445
Fold 36: 0.11283206459525172
Fold 37: 0.09115516275253335
Fold 38: 0.112852665136188
Fold 39: 0.17295088705846845
Fold 40: 0.13944918774271323
Fold 41: 0.14561370229845588
Fold 42: 0.0746971569263455
Fold 43: 0.18116447091935742
Fold 44: 0.15527358884419773
Fold 45: 0.1765504816179029
Fold 46: 0.22425660445306936
Fold 47: 0.14227602084961388
Fold 48: 0.13355932375444476
Fold 49: 0.19868471023149986
Fold 0: 0.18769074732569577
Fold 1: 0.1808377863718705
Fold 2: 0.08856560652277994
Fold 3: 0.15388194574513764
Fold 4: 0.08911816450988862
Fold 5: 0.13792626527777088
Fold 6: 0.10239834331131624
Fold 7: 0.17948487720985462
Fold 8: 0.10166748819817611
Fold 9: 0.09236307391042682
Fold 10: 0.2434195779364898
Fold 11: 0.17353553623833917
Fold 12: 0.28328122521506927
Fold 13: 0.14092453451468712
Fold 14: 0.10079495166315419
Fold 15: 0.11319218414116322
Fold 16: 0.09266868824943808
Fold 17: 0.12695480510538454
Fold 18: 0.08279430289302862
Fold 19: 0.12158607118938339
Fold 20: 0.0713151398153262
Fold 21: 0.11378196813477139
Fold 22: 0.06831850829773424
Fold 23: 0.05962122335853779
Fold 24: 0.08295885571339803
Fold 25: 0.07456921234134248
Fold 26: 0.11314650717593992
Fold 27: 0.12608880685185817
Fold 28: 0.07690477523248683
Fold 29: 0.08835389557890982
Fold 30: 0.08424836870675943
Fold 31: 0.12452046635991809
Fold 32: 0.15458143703413887
Fold 33: 0.09497232641282909
Fold 34: 0.09025014917229612
Fold 35: 0.3043129968181269
Fold 36: 0.20402346729004794
Fold 37: 0.1474675642932535
Fold 38: 0.04842216808276712
Fold 39: 0.18859445569675928
Fold 40: 0.10008167435233932
Fold 41: 0.12835515016156623
Fold 42: 0.08697589562178457
Fold 43: 0.1674303171251194
Fold 44: 0.11088244312635659
Fold 45: 0.12133545427494673
Fold 46: 0.102406439473951
Fold 47: 0.056175285274917656
Fold 48: 0.08829419459526348
Fold 49: 0.09965255637710814
Fold 0: 0.21960982252890007
Fold 1: 0.18760675126985485
Fold 2: 0.10021415419935395
Fold 3: 0.1290907663976834
Fold 4: 0.09370971182881063
Fold 5: 0.17402133487342925
Fold 6: 0.10818455139799223
Fold 7: 0.30413684380979283
Fold 8: 0.1866892048866019
Fold 9: 0.10515502997470923
Fold 10: 0.22747264778862636
Fold 11: 0.15529337371770707
Fold 12: 0.24847279830210198
Fold 13: 0.11129552452691233
Fold 14: 0.09339906251051623
Fold 15: 0.11602845403661931
Fold 16: 0.1199131386595271
Fold 17: 0.14895783644483124
Fold 18: 0.0850889886087237
Fold 19: 0.1124668774556143
Fold 20: 0.09304847524763676
Fold 21: 0.14283880583877484
Fold 22: 0.06912011735793774
Fold 23: 0.0972056481446414
Fold 24: 0.06343164965783878
Fold 25: 0.04498922446170621
Fold 26: 0.11388957841698436
Fold 27: 0.12688325037454357
Fold 28: 0.1376280661745265
Fold 29: 0.09691570111447856
Fold 30: 0.0774478925089575
Fold 31: 0.14752353377228472
Fold 32: 0.11484765448852294
Fold 33: 0.11756132150526535
Fold 34: 0.08174094310858082
Fold 35: 0.2841442788640519
Fold 36: 0.2624055971641158
Fold 37: 0.13177815886903296
Fold 38: 0.04819596279742579
Fold 39: 0.19298376086681618
Fold 40: 0.10636324753492937
Fold 41: 0.11523517819536637
Fold 42: 0.09421401138313942
Fold 43: 0.18282480757071692
Fold 44: 0.12101841460811451
Fold 45: 0.09521902176912396
Fold 46: 0.07374356701168466
Fold 47: 0.102521087792153
Fold 48: 0.07398771011281874
Fold 49: 0.045641557367893666
Fold 0: 0.1728105252710208
Fold 1: 0.2084571538746007
Fold 2: 0.09608155601987878
Fold 3: 0.11702067129369985
Fold 4: 0.09653463067116783
Fold 5: 0.11551257509217691
Fold 6: 0.13074682496425707
Fold 7: 0.1738777609188062
Fold 8: 0.18163719502786096
Fold 9: 0.10635942160181101
Fold 10: 0.07797013833896574
Fold 11: 0.16729089666534908
Fold 12: 0.186739900696492
Fold 13: 0.1450643044025696
Fold 14: 0.12234919754850579
Fold 15: 0.07283518456732094
Fold 16: 0.07358107705780083
Fold 17: 0.12093145147823264
Fold 18: 0.09748939773016663
Fold 19: 0.1035078488936957
Fold 20: 0.08594991484414297
Fold 21: 0.1235678339211749
Fold 22: 0.0985475581978802
Fold 23: 0.12570728822304558
Fold 24: 0.043950305846472
Fold 25: 0.10209697537049942
Fold 26: 0.14097690814070418
Fold 27: 0.10722753941088123
Fold 28: 0.08038879218739062
Fold 29: 0.13376325870461847
Fold 30: 0.08778081624348101
Fold 31: 0.16658342855384475
Fold 32: 0.1710318993539491
Fold 33: 0.10866376590446754
Fold 34: 0.08756380862543191
Fold 35: 0.13752886931515357
Fold 36: 0.18934560107208478
Fold 37: 0.09878069255939984
Fold 38: 0.08272402959404254
Fold 39: 0.2429939159644923
Fold 40: 0.15224150725510352
Fold 41: 0.12901733569688237
Fold 42: 0.1418114774253532
Fold 43: 0.15644403883893504
Fold 44: 0.12039848748824353
Fold 45: 0.12094765605945328
Fold 46: 0.10547027184714577
Fold 47: 0.12748186067557574
Fold 48: 0.08981832457457167
Fold 49: 0.08237869118703331
Fold 0: 0.24805725056781777
Fold 1: 0.11971702368540225
Fold 2: 0.18503691611478917
Fold 3: 0.11859840080997347
Fold 4: 0.07183901383366427
Fold 5: 0.19579100166995475
Fold 6: 0.1405589006532059
Fold 7: 0.17531616666990588
Fold 8: 0.1411815933827058
Fold 9: 0.5204565906833115
Fold 10: 1.4726378104307782
Fold 11: 1.3640838768784096
Fold 12: 0.22229109044845613
Fold 13: 0.13707752897649814
Fold 14: 0.10549437158800895
Fold 15: 0.07847113137221406
Fold 16: 0.07160208344798864
Fold 17: 0.13361682980394904
Fold 18: 0.06890810621818876
Fold 19: 0.12179690977113976
Fold 20: 0.24072819262028936
Fold 21: 0.13209652807912534
Fold 22: 0.11109329459634798
Fold 23: 0.0898243954923318
Fold 24: 0.09341384751273811
Fold 25: 0.13149349152003498
Fold 26: 0.11212236034395405
Fold 27: 0.12113598603050485
Fold 28: 0.1127082046149021
Fold 29: 0.09673406172532152
Fold 30: 0.04896216256978934
Fold 31: 0.18161166043960222
Fold 32: 0.1544357475836406
Fold 33: 0.213868245564004
Fold 34: 1.0299297032272305
Fold 35: 0.5186259919776375
Fold 36: 0.3300362979006318
Fold 37: 0.11295177647918114
Fold 38: 0.06683955660288332
Fold 39: 0.17406709322671945
Fold 40: 0.11936571381210535
Fold 41: 0.07303593450144136
Fold 42: 0.0692889734632753
Fold 43: 0.19185584201597777
Fold 44: 0.2756818738523745
Fold 45: 0.13845272609818815
Fold 46: 0.11386550566450294
Fold 47: 0.09814930582784462
Fold 48: 0.050330998737479135
Fold 49: 0.05331445651542264
Fold 0: 0.20653031984391212
Fold 1: 0.20679556163198134
Fold 2: 0.11583249876612624
Fold 3: 0.16347433640963857
Fold 4: 0.10897327745493297
Fold 5: 0.18574484958018628
Fold 6: 0.11018848596583329
Fold 7: 0.1405848110530511
Fold 8: 0.07826068432702138
Fold 9: 0.13493954479021697
Fold 10: 0.40395044223031523
Fold 11: 0.28610530577387566
Fold 12: 0.3442652231632219
Fold 13: 0.14754113818659864
Fold 14: 0.07945086266520943
Fold 15: 0.10172654503821037
Fold 16: 0.17420015511058817
Fold 17: 0.1516029180735235
Fold 18: 0.45602085932849595
Fold 19: 0.20176874650705823
Fold 20: 0.19615887750829417
Fold 21: 0.15113627972238766
Fold 22: 0.11570132603790916
Fold 23: 0.18459255143885125
Fold 24: 0.08004417969948834
Fold 25: 0.07695672990151774
Fold 26: 0.12231945622774046
Fold 27: 0.14152519687034715
Fold 28: 0.15008087263700914
Fold 29: 0.11546243954990026
Fold 30: 0.08305597667678687
Fold 31: 0.12061129193427093
Fold 32: 0.09481235782422616
Fold 33: 0.17111294842813404
Fold 34: 0.193148339220603
Fold 35: 0.2942384271320273
Fold 36: 0.24350056289136224
Fold 37: 0.08560253487141724
Fold 38: 0.06899661795895759
Fold 39: 0.14963251032205094
Fold 40: 0.13872509756466944
Fold 41: 0.17262978811844953
Fold 42: 0.10317490919185678
Fold 43: 0.15840538164698903
Fold 44: 0.10899577933725224
Fold 45: 0.097848672587305
Fold 46: 0.10996436254339254
Fold 47: 0.0908488880586062
Fold 48: 0.09534381461537901
Fold 49: 0.04755942318198768
Fold 0: 0.1494902615543869
Fold 1: 0.1513271467335677
Fold 2: 0.10619898093630444
Fold 3: 0.13427534084990642
Fold 4: 0.08568286478808662
Fold 5: 0.12178624989269415
Fold 6: 0.13752658455034492
Fold 7: 0.18289210181043095
Fold 8: 0.14233382227591018
Fold 9: 0.3774722804512118
Fold 10: 0.631073119054102
Fold 11: 0.5118871878118776
Fold 12: 0.2104220721625956
Fold 13: 0.13379036331278696
Fold 14: 0.13753891886840858
Fold 15: 0.10795274696983238
Fold 16: 0.08950505011082151
Fold 17: 0.1322590295339531
Fold 18: 0.07346981781809493
Fold 19: 0.21588495766431837
Fold 20: 0.2319799314953666
Fold 21: 0.1420799560369734
Fold 22: 0.16458427959229222
Fold 23: 0.11315847463897402
Fold 24: 0.4942733721034173
Fold 25: 0.2823770492546426
Fold 26: 0.36517096873699406
Fold 27: 0.09914153730540037
Fold 28: 0.09627142432132255
Fold 29: 0.07899657186610058
Fold 30: 0.0898971568926811
Fold 31: 0.15337550443984507
Fold 32: 0.10852370827261214
Fold 33: 0.09090719051098434
Fold 34: 0.4102729215695067
Fold 35: 0.2692411472939707
Fold 36: 0.46484815815301544
Fold 37: 0.13348810627043775
Fold 38: 0.13389620845280842
Fold 39: 0.17960139776026687
Fold 40: 0.12697319255536182
Fold 41: 0.09817543744935708
Fold 42: 0.08751385884720445
Fold 43: 0.20960454109786256
Fold 44: 0.5375385952042411
Fold 45: 0.24624193788079246
Fold 46: 0.16031197725721547
Fold 47: 0.09541878712206915
Fold 48: 0.18566848847214507
Fold 49: 0.28897905124649886
Fold 0: 0.0999119271200602
Fold 1: 0.15072268312960327
Fold 2: 0.10949657328402668
Fold 3: 0.12027429849086543
Fold 4: 0.13421816388584093
Fold 5: 0.07541641035402334
Fold 6: 0.08283809909227131
Fold 7: 0.1331818181843706
Fold 8: 0.13070712804029183
Fold 9: 0.15815364503358917
Fold 10: 0.11195889897481211
Fold 11: 0.09590619035614731
Fold 12: 0.20894647937999036
Fold 13: 0.12697279005700596
Fold 14: 0.1433095567973755
Fold 15: 0.0939007542111773
Fold 16: 0.1286755928718547
Fold 17: 0.10368477181080654
Fold 18: 0.17309072075164525
Fold 19: 0.11849282587315656
Fold 20: 0.1546368626466053
Fold 21: 0.11302786018695594
Fold 22: 0.10946703967117946
Fold 23: 0.17015401952591808
Fold 24: 0.14572978624693989
Fold 25: 0.08487343771741859
Fold 26: 0.11065142494759399
Fold 27: 0.1760488488340223
Fold 28: 0.20558740979064613
Fold 29: 0.16740698411281688
Fold 30: 0.08421545971596656
Fold 31: 0.1766434885519753
Fold 32: 0.120124905255189
Fold 33: 0.13231083037062324
Fold 34: 0.10589851695917421
Fold 35: 0.15795951829863367
Fold 36: 0.15930179270867326
Fold 37: 0.11495010610265126
Fold 38: 0.09531974001778441
Fold 39: 0.10031247157776592
Fold 40: 0.08372353015242369
Fold 41: 0.1353170500973751
Fold 42: 0.1775025827984989
Fold 43: 0.09839734999261389
Fold 44: 0.12169287602023111
Fold 45: 0.1561675642470047
Fold 46: 0.1209884435778744
Fold 47: 0.11506499733514877
Fold 48: 0.13000456284526568
Fold 49: 0.09605124382439885
Fold 0: 0.1993622553244307
Fold 1: 0.2249397312766014
Fold 2: 0.12793102031800077
Fold 3: 0.150954503818591
Fold 4: 0.20915813766705782
Fold 5: 0.3141385170101611
Fold 6: 0.16011970995851432
Fold 7: 0.2350496003111154
Fold 8: 0.25528889761094625
Fold 9: 0.15746009179155634
Fold 10: 0.44077668504831496
Fold 11: 0.354228782186239
Fold 12: 0.2606800199248189
Fold 13: 0.2597719890943537
Fold 14: 0.2561816535119413
Fold 15: 0.17905213800947345
Fold 16: 0.15296348399082652
Fold 17: 0.16061682268207134
Fold 18: 0.13445686479586216
Fold 19: 0.21009767097954724
Fold 20: 0.1366104875580849
Fold 21: 0.13903921556639248
Fold 22: 0.11845870271045598
Fold 23: 0.36364809739400644
Fold 24: 0.1913416814166763
Fold 25: 0.1612306055227074
Fold 26: 0.17892644323678605
Fold 27: 0.1640558880352896
Fold 28: 0.12413003464348331
Fold 29: 0.1810718109396298
Fold 30: 0.13046413797535472
Fold 31: 0.17485847557650377
Fold 32: 0.13671421122205263
Fold 33: 0.1309857856725361
Fold 34: 0.32899192572081043
Fold 35: 0.41050060778744646
Fold 36: 0.19704728502108473
Fold 37: 0.40802736101331666
Fold 38: 0.16893090471507305
Fold 39: 0.35883825221445853
Fold 40: 0.2580069361983393
Fold 41: 0.13865055565701492
Fold 42: 0.08120828110856802
Fold 43: 0.20302926604991012
Fold 44: 0.13769612010733528
Fold 45: 0.1615487607600582
Fold 46: 0.14816367147499981
Fold 47: 0.22186964200272563
Fold 48: 0.07522875713942459
Fold 49: 0.1469597614515498
Fold 0: 0.3595623411690266
Fold 1: 0.20412281952324351
Fold 2: 0.1688739724861319
Fold 3: 0.14395231299942493
Fold 4: 0.16525330998153168
Fold 5: 0.593679799255069
Fold 6: 0.5196305767039529
Fold 7: 0.9044225701747383
Fold 8: 0.28809878969650266
Fold 9: 0.2057162842764531
Fold 10: 0.23506696740250022
Fold 11: 0.5852118758196005
Fold 12: 0.4259508304213676
Fold 13: 0.22774014360903508
Fold 14: 0.20584247103771008
Fold 15: 0.17850178538142036
Fold 16: 0.22755694440788538
Fold 17: 0.15620720285383408
Fold 18: 0.16779672011653035
Fold 19: 0.21483495682810028
Fold 20: 0.21375336508339346
Fold 21: 0.1954317783584159
Fold 22: 0.19774286402497843
Fold 23: 0.1352564543695545
Fold 24: 0.29568010942982187
Fold 25: 0.21510105053468598
Fold 26: 0.22369202898776452
Fold 27: 0.13620923885722092
Fold 28: 0.16657928295481167
Fold 29: 0.12737883256942278
Fold 30: 0.17203528638708204
Fold 31: 0.15452976095470242
Fold 32: 0.24834107513162815
Fold 33: 0.13505353180649157
Fold 34: 0.17341124737321273
Fold 35: 0.23254276267517718
Fold 36: 0.2075544785393606
Fold 37: 0.11175562804957627
Fold 38: 0.13912020603179398
Fold 39: 0.18200595165129455
Fold 40: 0.1735249680709495
Fold 41: 0.14932853773381025
Fold 42: 0.11938496743903869
Fold 43: 0.19354574138968944
Fold 44: 0.13671955800818308
Fold 45: 0.2985788657690175
Fold 46: 0.19187315354892912
Fold 47: 0.17306194277638776
Fold 48: 0.1514937759442253
Fold 49: 0.1383761240179293
Fold 0: 0.025166772085160948
Fold 1: 0.1860631140708778
Fold 2: 0.32783850359547984
Fold 3: 0.11375659073194
Fold 4: 0.1435620648050694
Fold 5: 0.22553816614972108
Fold 6: 0.10418364555302545
Fold 7: 0.13375254388374047
Fold 8: 0.11771281414007947
Fold 9: 0.13588037798468802
Fold 10: 0.07586646706839593
Fold 11: 0.09675449896461322
Fold 12: 0.07686839280663606
Fold 13: 0.10811065749523088
Fold 14: 0.08593269408699074
Fold 15: 0.12798929344620244
Fold 16: 0.15753233644096884
Fold 17: 0.06795386637163991
Fold 18: 0.09675440351536736
Fold 19: 0.1500243650677801
Fold 20: 0.07650548759111753
Fold 21: 0.12720491905118997
Fold 22: 0.08678840723906356
Fold 23: 0.17060016955774368
Fold 24: 0.046181838017151026
Fold 25: 0.05081353847016717
Fold 26: 0.11741483987023478
Fold 27: 0.13735809729801976
Fold 28: 0.06732915676753634
Fold 29: 0.07828191166282532
Fold 30: 0.14470283971902717
Fold 31: 0.07904739060262603
Fold 32: 0.10078853047022551
Fold 33: 0.06865918588786131
Fold 34: 0.09515851457856651
Fold 35: 0.09969996535080859
Fold 36: 0.14866101573435025
Fold 37: 0.22379333603847923
Fold 38: 0.08973631314978414
Fold 39: 0.12809026665194106
Fold 40: 0.13399122356404466
Fold 41: 0.17184626732429042
Fold 42: 0.10064544123727606
Fold 43: 0.13174270764111215
Fold 44: 0.12811740306720215
Fold 45: 0.0946957841362773
Fold 46: 0.0747911874258834
Fold 47: 0.08647211340685605
Fold 48: 0.09188566787384599
Fold 49: 0.12779641177590695
Fold 0: 0.25766903509656325
Fold 1: 0.2696608634933954
Fold 2: 0.12953395501529041
Fold 3: 0.15779325674965083
Fold 4: 0.07670307273917197
Fold 5: 0.21472427806027214
Fold 6: 0.11232470207820867
Fold 7: 0.275898170988138
Fold 8: 0.11063010699121714
Fold 9: 0.09832995304755393
Fold 10: 0.19613165568689378
Fold 11: 0.15923699272928438
Fold 12: 0.1844637257187451
Fold 13: 0.2031567415910473
Fold 14: 0.16013653981515089
Fold 15: 0.12995101792685634
Fold 16: 0.06810063288959749
Fold 17: 0.18095326953946475
Fold 18: 0.34633687338076263
Fold 19: 0.16712431699344094
Fold 20: 0.21094452762694693
Fold 21: 0.1773909141499756
Fold 22: 0.11398901821571635
Fold 23: 0.10733549368050935
Fold 24: 0.06510669536686087
Fold 25: 0.052692467089811366
Fold 26: 0.14644079979075164
Fold 27: 0.10833313474078583
Fold 28: 0.16054826844853307
Fold 29: 0.1201455174656899
Fold 30: 0.07752655045354433
Fold 31: 0.15187147070048554
Fold 32: 0.11800531000111811
Fold 33: 0.16321923164882046
Fold 34: 0.10895175740070165
Fold 35: 0.3547361332176746
Fold 36: 0.13803082176647954
Fold 37: 0.15172109127714295
Fold 38: 0.1336284071962091
Fold 39: 0.21493216991516384
Fold 40: 0.08098190549642098
Fold 41: 0.15704189338109872
Fold 42: 0.09885500599313804
Fold 43: 0.18266545480208554
Fold 44: 0.126034432880294
Fold 45: 0.1631892792199271
Fold 46: 0.11303195268851433
Fold 47: 0.10771521269327672
Fold 48: 0.11571842113034503
Fold 49: 0.1293576010627447
Fold 0: 0.27797014597598413
Fold 1: 0.2973995480801557
Fold 2: 0.16170829034758757
Fold 3: 0.281771092025197
Fold 4: 0.23016387954271575
Fold 5: 0.19963874118425404
Fold 6: 0.22358009396058345
Fold 7: 0.27936109062939984
Fold 8: 0.26273876410645786
Fold 9: 0.12786041328966924
Fold 10: 0.160031626212596
Fold 11: 0.21051662958415335
Fold 12: 0.362107994526786
Fold 13: 0.2624040422160561
Fold 14: 0.1831693456602011
Fold 15: 0.20951287668712307
Fold 16: 0.14439723282500808
Fold 17: 0.1452661823091598
Fold 18: 0.1715655770273443
Fold 19: 0.15611492475338345
Fold 20: 0.13749696732979513
Fold 21: 0.19044629876274008
Fold 22: 0.1344042104585285
Fold 23: 0.12628304020255163
Fold 24: 0.1163569241052949
Fold 25: 0.08932972175994625
Fold 26: 0.22250259507914488
Fold 27: 0.2503055375858338
Fold 28: 0.12484771841341884
Fold 29: 0.1477351128741182
Fold 30: 0.10434807023957195
Fold 31: 0.15852971856608986
Fold 32: 0.12496092092319734
Fold 33: 0.1238219581656788
Fold 34: 0.14924213854561133
Fold 35: 0.3309485896181679
Fold 36: 0.26669197224193386
Fold 37: 0.1700335806604402
Fold 38: 0.10035460474545774
Fold 39: 0.3419348110919035
Fold 40: 0.19330285178244327
Fold 41: 0.17073474700313923
Fold 42: 0.08991180594020634
Fold 43: 0.2398232465447764
Fold 44: 0.14073785947800038
Fold 45: 0.20148885081828966
Fold 46: 0.0991718047507405
Fold 47: 0.06602768044753725
Fold 48: 0.10006987669103708
Fold 49: 0.12459228631927202
Fold 0: 0.2200778463870612
Fold 1: 0.19477903113913547
Fold 2: 0.11436540313246994
Fold 3: 0.09080773722843775
Fold 4: 0.11766900824808021
Fold 5: 0.17109907242003633
Fold 6: 0.1064989090100867
Fold 7: 0.323264415279305
Fold 8: 0.14591196967252518
Fold 9: 0.19090826600854688
Fold 10: 0.22336664152172042
Fold 11: 0.2940389107340375
Fold 12: 0.16081539234763836
Fold 13: 0.13078623637278874
Fold 14: 0.2332848181978508
Fold 15: 0.2029088262541999
Fold 16: 0.12711823288342355
Fold 17: 0.129255447498249
Fold 18: 0.11816825865562515
Fold 19: 0.2576850005966845
Fold 20: 0.14871918670483944
Fold 21: 0.1792887851138789
Fold 22: 0.2228822698731842
Fold 23: 0.13140224898403433
Fold 24: 0.08365379127933088
Fold 25: 0.13191655276094766
Fold 26: 0.21141229058189795
Fold 27: 0.10932951379165189
Fold 28: 0.11035123666387532
Fold 29: 0.08946666442410318
Fold 30: 0.06682239547167373
Fold 31: 0.2669285796060556
Fold 32: 0.12018630741620434
Fold 33: 0.5003129991400136
Fold 34: 0.9130372887772965
Fold 35: 0.3734235556841201
Fold 36: 0.2541735629255024
Fold 37: 0.08689554705367974
Fold 38: 0.7098268079607812
Fold 39: 0.3757770628948242
Fold 40: 0.2510483494105848
Fold 41: 0.20648491948764627
Fold 42: 0.13901054877016447
Fold 43: 0.23364653377697644
Fold 44: 0.649056994677021
Fold 45: 0.13473529765864983
Fold 46: 0.13782956662846704
Fold 47: 0.25547216728444083
Fold 48: 0.1334685843036568
Fold 49: 0.07412538706735797
Fold 0: 0.12556190482325538
Fold 1: 0.21023461458621
Fold 2: 0.11550484426805678
Fold 3: 0.1825631120902709
Fold 4: 0.20991501798400228
Fold 5: 0.13923025235105363
Fold 6: 0.21049818932460831
Fold 7: 0.162478773898952
Fold 8: 0.2870886907633503
Fold 9: 0.11984357818633994
Fold 10: 0.1847292280182879
Fold 11: 0.2287507973819963
Fold 12: 0.22068464912154773
Fold 13: 0.2012342836544986
Fold 14: 0.1219965683534469
Fold 15: 0.16078904157700408
Fold 16: 0.18137765087636482
Fold 17: 0.16820001620674252
Fold 18: 0.1056047532313627
Fold 19: 0.23695025117335902
Fold 20: 0.4910013148883267
Fold 21: 0.2706230271076288
Fold 22: 0.11691325115342811
Fold 23: 0.16333986154479396
Fold 24: 0.1990156507111815
Fold 25: 0.14061676324492042
Fold 26: 0.14471664283420158
Fold 27: 0.20674230047151126
Fold 28: 0.1611054349507723
Fold 29: 0.16210277854469787
Fold 30: 0.10171494017335202
Fold 31: 0.16595042091129378
Fold 32: 0.14079240388914563
Fold 33: 0.13336245359103116
Fold 34: 0.11165704993167605
Fold 35: 0.2870744238731473
Fold 36: 0.27626704091283477
Fold 37: 0.2202737800656924
Fold 38: 0.10052047888421858
Fold 39: 0.1920819350034256
Fold 40: 0.17864563361466965
Fold 41: 0.10509681738739858
Fold 42: 0.08263372298529448
Fold 43: 0.3564070650918967
Fold 44: 0.10407119485426176
Fold 45: 0.1440919162252081
Fold 46: 0.11053316873410925
Fold 47: 0.1286784735140682
Fold 48: 0.16003637028922185
Fold 49: 0.12585489510883044
Fold 0: 1.5740578015773226
Fold 1: 1.3461298933138912
Fold 2: 1.080085766673001
Fold 3: 0.613370135378901
Fold 4: 0.5113872309906826
Fold 5: 0.8649818431703512
Fold 6: 0.5150343282622879
Fold 7: 1.7424634041278757
Fold 8: 1.6592836759463503
Fold 9: 0.6318352123686143
Fold 10: 1.147665936499405
Fold 11: 0.8237255208413549
Fold 12: 0.7010445736362316
Fold 13: 1.4303650960614958
Fold 14: 1.5370167167286057
Fold 15: 0.48678093492891994
Fold 16: 0.22567484848594857
Fold 17: 0.4116594041701881
Fold 18: 0.21248192227058163
Fold 19: 0.41999436333758583
Fold 20: 0.8833383415803171
Fold 21: 0.5436384645303364
Fold 22: 0.7652135252693681
Fold 23: 0.5330370467741724
Fold 24: 0.6494208117497738
Fold 25: 0.6725300182979133
Fold 26: 0.3189490383479233
Fold 27: 0.5033974921910176
Fold 28: 0.5416544522702771
Fold 29: 0.33662734337297173
Fold 30: 0.36877918859433695
Fold 31: 0.3935105050192719
Fold 32: 0.3471588515216746
Fold 33: 0.20550682341304197
Fold 34: 0.43283119347919247
Fold 35: 0.5155083658833608
Fold 36: 1.0483299627656937
Fold 37: 0.19049922003034772
Fold 38: 0.2606101221991657
Fold 39: 1.9055112737402444
Fold 40: 1.5406674914915257
Fold 41: 1.7462287145667956
Fold 42: 1.43349991836472
Fold 43: 0.3493048860289373
Fold 44: 0.38659056060334285
Fold 45: 0.2038261523334019
Fold 46: 0.16177993555082595
Fold 47: 0.17246696880412435
Fold 48: 0.2651881332657742
Fold 49: 0.2223006676433291
Fold 0: 0.666665730049995
Fold 1: 0.3455244705783633
Fold 2: 0.2656804202245407
Fold 3: 0.19995362511857556
Fold 4: 0.1505810283472706
Fold 5: 0.7562568664470806
Fold 6: 0.7893070679896353
Fold 7: 0.41462014047653367
Fold 8: 0.2505915299936193
Fold 9: 0.3499358295802198
Fold 10: 0.7386847762717567
[... per-fold CV RMSE printout truncated: the helper prints 50 sliding-window fold scores (on the log1p scale) for each product family ...]
print(cv_multivariate_params(y_train_log1p, tuned_params, cv=cv, fh=fh, metric=rmse))
69.33421986708696
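`cv_multivariate_params` is a helper defined earlier in the notebook: it prints the fold scores above and returns the overall CV RMSE. As a rough sketch of the per-family fold loop it performs (an assumption on my part, but one that matches the small log1p-scale fold values printed above; it reuses the LGBMRegressor and make_reduction imports from earlier):
def cv_family_scores(y_col, n_estimators, max_depth, window_length, cv, metric):
    #hypothetical per-family loop: refit the reduced LGBM forecaster on each
    #sliding training window and score its forecast for that fold
    fold_scores = []
    for i, (train_idx, test_idx) in enumerate(cv.split(y_col)):
        y_tr, y_te = y_col.iloc[train_idx], y_col.iloc[test_idx]
        forecaster = make_reduction(
            LGBMRegressor(n_estimators=n_estimators, max_depth=max_depth),
            window_length=window_length)
        forecaster.fit(y_tr)
        y_hat = forecaster.predict(ForecastingHorizon(y_te.index, is_relative=False))
        fold_scores.append(metric(y_te, y_hat))
        print('Fold {}: {}'.format(i, fold_scores[-1]))
    return np.mean(fold_scores)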
It looks like our best-performing model after tuning is an LGBM regressor wrapped with sktime's make_reduction. It achieved our lowest CV RMSE so far: 69.3342.
Let's see how it does when predicting the holdout test set.
y_pred_df = pd.DataFrame(index=y_test.index)
result = []
for col in y_train:
    #rebuild each family's forecaster with its tuned hyperparameters
    regressor = LGBMRegressor(n_estimators=tuned_params[col]['estimator__n_estimators'],
                              max_depth=tuned_params[col]['estimator__max_depth'])
    forecaster = make_reduction(regressor, window_length=tuned_params[col]['window_length'])
    forecaster.fit(y_train_log1p[col])
    #use a loop-local name so we don't overwrite the NaiveForecaster's y_pred,
    #which the Diebold-Mariano test below still needs
    y_pred_col = forecaster.predict(fh)
    #reverse the log1p transform with expm1 before scoring on the original scale
    result.append(rmse(y_test[col], np.expm1(y_pred_col)))
    y_pred_df = pd.concat([y_pred_df, pd.DataFrame(np.expm1(y_pred_col), columns=[col])], axis=1)
print(np.mean(result))
68.62324729799685
for col in y_test.columns:
    plot_series(y_test[col], y_pred_df[col])
#compare the NaiveForecaster predictions (y_pred) against the tuned
#LGBM predictions (y_pred_df) on the holdout set
test_stat, pvalue = diebold_mariano_test(y_pred, y_pred_df, y_test, 4)
print('Diebold-Mariano Test Statistic : ', np.mean(test_stat))
print('2 tailed test p-value : ', np.mean(pvalue))
Diebold-Mariano Test Statistic : 36.12321837399342 2 tailed test p-value : 0.0151827887020265
The predictions are definitely improving! The Diebold-Mariano test shows that the difference in predictive accuracy between the original NaiveForecaster and this tuned LGBM model is statistically significant at alpha = .05.
The null hypothesis of the DM test is that there is no significant difference in the accuracy of the two forecasts. Because the p-value (0.0152) falls below alpha, we can reject the null hypothesis and conclude that the two forecasts differ significantly in predictive accuracy.
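The `diebold_mariano_test` helper used above is defined earlier in the notebook and isn't reproduced here. For reference, a minimal sketch of the statistic it is based on, assuming two forecast-error arrays for the same series and a squared-error loss differential with an HAC variance estimate (the Harvey small-sample correction is omitted):
from scipy import stats

def dm_test_sketch(e1, e2, h=1):
    #e1, e2: forecast errors from the two competing models on the same series
    #h: forecast horizon; sets how many autocovariance lags enter the variance
    d = np.asarray(e1)**2 - np.asarray(e2)**2  #squared-error loss differential
    n = len(d)
    d_bar = d.mean()
    #HAC variance of d_bar: gamma_0 plus twice the first h-1 autocovariances
    gamma = [np.sum((d[k:] - d_bar) * (d[:n - k] - d_bar)) / n for k in range(h)]
    var_d = (gamma[0] + 2 * sum(gamma[1:])) / n
    dm_stat = d_bar / np.sqrt(var_d)
    pvalue = 2 * stats.norm.sf(abs(dm_stat))  #two-tailed p-value under N(0,1)
    return dm_stat, pvalue

#e.g. per family: dm_test_sketch(y_test[col] - y_pred[col],
#                                y_test[col] - y_pred_df[col], h=4)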